1 Environment and datasets

1.1 Setup environment

## ------------------------------------------------------------------
## Environment setup: load the benchmark package plus table/string
## helpers, then pick the output directory (local GSoC path when it
## exists, otherwise the repository's results folder).
## ------------------------------------------------------------------
library(NNbenchmark)
library(kableExtra)
library(dplyr)
library(stringr)
options(scipen = 999)  # avoid scientific notation in printed numbers

odir <- if (dir.exists("D:/GSoC2020/Results/2020run04/")) {
  "D:/GSoC2020/Results/2020run04/"
} else {
  "../results_2020_gsoc2020/"
}

2 Read csv files and calculate some statistics for the metrics

## Collect the per-dataset "-results.csv" files, excluding the large
## Wood dataset from the comparison.
resultfile     <- list.files(odir, pattern = "-results.csv", full.names = TRUE)
nonlargeresult <- grep("Wood", resultfile, invert = TRUE, value = TRUE)

## One data.frame per file, named after the NNdatasets entries
## (assumes the file order matches the dataset order — TODO confirm).
lf        <- lapply(nonlargeresult, csv::as.csv)
names(lf) <- names(NNdatasets)
#lf <- lf[c(1:4,6,7,10,12)]

## Split each event label into dataset prefix (ds), package::algorithm
## (pfa) and run id, keeping the metric columns alongside.
gfr <- lapply(lf, function(dfr) {
  ds_prefix <- str_extract(dfr$event, "\\w+_")
  cbind(
    ds  = str_remove(ds_prefix, "_"),
    pfa = str_sub(str_remove(dfr$event, ds_prefix), 1, -4),
    run = str_sub(dfr$event, -2, -1),
    dfr[, c("RMSE", "MAE", "WAE", "time")]
  )
})

## Per-(package::algorithm) summary statistics for each dataset:
## mean time plus min/median RMSE (and their gap) and median MAE/WAE.
yfr <- lapply(gfr, function(dfr) {
  smry <- dfr %>%
    group_by(pfa) %>%
    summarise(time.mean = mean(time),
              RMSE.min  = min(RMSE),
              RMSE.med  = median(RMSE),
              RMSE.d51  = median(RMSE) - min(RMSE),
              MAE.med   = median(MAE),
              WAE.med   = median(WAE))
  as.data.frame(smry)
})
## Add a stable row index (npfa) used later to recover ranks after sorting.
## seq_len() is safe for zero-row tables, unlike 1:nrow().
yfr <- lapply(yfr, function(dfr) {
  dfr$npfa <- seq_len(nrow(dfr))
  dfr
})

2.1 Make csv tables for summaries per package and per dataset

These CSV files are used in the supplementary materials of the paper.

## Write one summary CSV per dataset; these files feed the paper's
## supplementary materials. seq_along() handles an empty list safely,
## where 1:length() would iterate over c(1, 0).
for (j in seq_along(yfr)) {
  write.csv(yfr[[j]],
            file = paste0(odir, names(yfr)[j], "-result-summary.csv"),
            row.names = FALSE)
}

3 Ranks and scores

3.1 Calculate ranks per dataset and merge results

## Add rank columns to one dataset summary. For each metric the rows are
## sorted by the given key(s); inverting the resulting npfa permutation
## with order() yields, for every original row, its position in that sort
## (ties are broken by the secondary keys, then by original row order).
rankMOFtime <- function(dfr) {
  rank_in <- function(...) {
    sorted_npfa <- dfr[order(...), "npfa"]
    order(sorted_npfa)  # inverse permutation = rank of each original row
  }
  transform(dfr,
            time.rank    = rank_in(dfr$time.mean),
            RMSE.rank    = rank_in(dfr$RMSE.min, dfr$time.mean, dfr$RMSE.med),
            RMSEmed.rank = rank_in(dfr$RMSE.med, dfr$RMSE.min, dfr$time.mean),
            RMSEd51.rank = rank_in(dfr$RMSE.d51),
            MAE.rank     = rank_in(dfr$MAE.med),
            WAE.rank     = rank_in(dfr$WAE.med)
  )
}
sfr     <- lapply(yfr, rankMOFtime)
sfrwide <- do.call(cbind, sfr)

3.2 Global scores on combined datasets (final table)

## Global score per metric: sum each algorithm's per-dataset ranks across
## all datasets, then rank those sums (ties share the minimum rank).
## The six copy-pasted stanzas are factored into two small helpers;
## fixed = TRUE matches the metric tag literally (the original regexes
## matched the same columns, the dot being an accidental wildcard).
rank_cols <- function(tag) {
  sfrwide[, grep(tag, colnames(sfrwide), fixed = TRUE), drop = FALSE]
}
global_score <- function(rank_dfr) {
  rank(rowSums(rank_dfr), ties.method = "min")
}

sfr.time      <- rank_cols("time.rank")
time.score    <- global_score(sfr.time)
sfr.RMSE      <- rank_cols("RMSE.rank")
RMSE.score    <- global_score(sfr.RMSE)
sfr.RMSEmed   <- rank_cols("RMSEmed.rank")
RMSEmed.score <- global_score(sfr.RMSEmed)
sfr.RMSEd51   <- rank_cols("RMSEd51.rank")
RMSEd51.score <- global_score(sfr.RMSEd51)
sfr.MAE       <- rank_cols("MAE.rank")
MAE.score     <- global_score(sfr.MAE)
sfr.WAE       <- rank_cols("WAE.rank")
WAE.score     <- global_score(sfr.WAE)

## Assemble the global score table. The pfa labels are taken from the
## mDette summary — assumes the same row (algorithm) order in every
## dataset summary, which rankMOFtime preserves.
scoredfr0 <- cbind(sfr$mDette[, "pfa", drop = FALSE],
                   data.frame(time.score,
                              RMSE.score,
                              RMSEmed.score,
                              RMSEd51.score,
                              MAE.score,
                              WAE.score))

## Final table, sorted by the global RMSE score, with clean row names.
scoredfr <- scoredfr0[order(scoredfr0[["RMSE.score"]]), ]
rownames(scoredfr) <- NULL

# Render the final score table as a styled HTML table.
kable(scoredfr)%>%
  kable_styling(bootstrap_options = c("striped", "hover", "condensed"))
pfa time.score RMSE.score RMSEmed.score RMSEd51.score MAE.score WAE.score
nlsr::nlxb_none 20 1 2 10 2 4
rminer::fit_none 14 2 1 6 1 1
nnet::nnet_none 3 3 2 17 4 5
MachineShop::fit_none 6 4 8 20 8 8
validann::ann_BFGS 36 5 4 9 3 3
traineR::train.nnet_none 4 6 5 14 5 2
radiant.model::nn_none 10 7 10 32 12 11
validann::ann_CG 60 8 6 13 6 7
CaDENCE::cadence.fit_optim 46 9 26 48 19 32
brnn::brnn_Gauss-Newton 7 10 14 10 16 14
caret::avNNet_none 16 11 9 21 9 9
h2o::h2o.deeplearning_first-order 49 12 7 7 7 6
validann::ann_L-BFGS-B 37 13 13 35 15 13
EnsembleBase::Regression.Batch.Fit_none 5 14 15 28 14 15
monmlp::monmlp.fit_BFGS 26 15 11 19 10 12
qrnn::qrnn.fit_none 27 16 18 25 11 31
automl::automl_train_manual_trainwgrad_adam 48 17 19 34 17 18
minpack.lm::nlsLM_none 14 17 12 5 13 10
RSNNS::mlp_Rprop 24 19 28 52 26 30
deepnet::nn.train_BP 22 20 17 36 20 17
RSNNS::mlp_SCG 31 21 20 27 21 22
neuralnet::neuralnet_rprop- 19 22 22 45 23 23
keras::fit_adamax 50 23 16 23 17 16
neuralnet::neuralnet_rprop+ 18 24 23 47 24 25
RSNNS::mlp_Std_Backpropagation 23 25 24 23 25 26
RSNNS::mlp_BackpropChunk 27 26 31 37 30 28
automl::automl_train_manual_trainwgrad_RMSprop 47 27 30 44 32 29
RSNNS::mlp_BackpropWeightDecay 29 28 21 31 22 19
neuralnet::neuralnet_sag 40 29 46 59 45 50
RSNNS::mlp_BackpropMomentum 25 30 24 26 26 21
keras::fit_adam 43 31 27 42 28 20
neuralnet::neuralnet_slr 30 32 35 39 36 41
ANN2::neuralnetwork_rmsprop 13 33 29 33 30 24
AMORE::train_ADAPTgdwm 16 34 33 40 29 36
ANN2::neuralnetwork_adam 12 35 32 30 33 27
keras::fit_nadam 44 36 37 55 38 41
keras::fit_adagrad 58 37 43 51 42 38
AMORE::train_ADAPTgd 9 38 34 12 35 33
automl::automl_train_manual_trainwpso 57 39 42 49 41 40
keras::fit_adadelta 59 40 36 18 34 34
validann::ann_Nelder-Mead 56 41 43 46 44 43
AMORE::train_BATCHgd 39 42 40 28 43 35
AMORE::train_BATCHgdwm 41 43 37 15 40 37
keras::fit_sgd 51 44 47 43 48 46
ANN2::neuralnetwork_sgd 10 45 40 22 39 39
deepdive::deepnet_adam 33 46 39 1 37 44
neuralnet::neuralnet_backprop 35 47 45 16 45 45
monmlp::monmlp.fit_Nelder-Mead 32 48 49 50 47 47
keras::fit_rmsprop 38 49 54 58 54 54
CaDENCE::cadence.fit_Rprop 55 50 55 60 52 56
deepdive::deepnet_rmsProp 34 51 48 4 49 48
RSNNS::mlp_BackpropBatch 42 52 51 41 51 51
snnR::snnR_none 8 53 50 8 50 49
validann::ann_SANN 21 54 52 53 53 53
CaDENCE::cadence.fit_psoptim 53 55 56 53 56 57
deepdive::deepnet_momentum 54 56 53 3 55 52
RSNNS::mlp_Quickprop 44 57 58 38 57 58
elmNNRcpp::elm_train_extremeML 1 58 59 57 59 59
deepdive::deepnet_gradientDescent 52 59 57 2 58 55
ELMR::OSelm_train.formula_extremeML 2 60 60 56 60 60

4 Figures

4.1 Score density per package

## Per-dataset RMSE ranks with rows sorted by the global RMSE score
## (column 1 is the mDette pfa label, the rest are <dataset>.RMSE.rank).
rkperalgo <- sfrwide[order(scoredfr0$RMSE.score),
                     c(1, grep("RMSE.rank", colnames(sfrwide)))]

## Package part of the "package::algorithm" labels.
pkgname <- sapply(strsplit(rkperalgo$mDette.pfa, "::"), head, n = 1)
n <- NROW(rkperalgo)

## rkproba[r, j]: empirical probability that algorithm j attains rank r
## across the datasets. vapply/seq_len replace sapply/1:n for type safety
## and the empty-input edge case.
rkproba <- vapply(seq_len(n), function(j)
  vapply(seq_len(n),
         function(r) mean(as.numeric(rkperalgo[j, -1]) == r),
         numeric(1)),
  numeric(n)
)

colnames(rkproba) <- paste0(pkgname, ".", rownames(rkperalgo))

## Grey ramp for the probability heatmap (white = 0 ... dark = high).
BandW <- c("white", "grey90", "grey70", "grey50", "grey30", "grey10")

#png(paste0(odir, "/","scoreprobperpkgBnW.png"), width = 800, height = 800)
reshtm <- heatmap(rkproba, Rowv = NA, Colv = NA,
                  xlab = "Package:Algorithm", ylab = "RMSE score",
                  main = "Score probabilities over 12 packages",
                  margins = c(6, 3), scale = "none", col = BandW)
## 'legend' spelled out in full: the original 'leg=' relied on partial
## argument matching.
legend("topleft", fill = BandW, legend = 0:5/5)

#dev.off()

4.2 Comparison of global scores and RMSE value per dataset

## =====================================
## GLOBAL SCORE APPLIED TO EVERY DATASET
## =====================================
## Attach the global time/RMSE scores to one dataset's rank table and
## return it sorted by global RMSE score. Assumes `x` and `y` rows are
## aligned (same algorithm order).
merge_sfr_dfr <- function(x, y) {
  keep_x <- c("npfa", "pfa", "time.mean", "RMSE.min", "time.rank", "RMSE.rank")
  keep_y <- c("time.score", "RMSE.score")
  merged <- cbind(x[, keep_x], y[, keep_y])
  merged[order(merged$RMSE.score), ]
}
# Combine each dataset's rank table with the global scores
# (row alignment between sfr tables and scoredfr0 assumed).
zfr <- lapply(sfr, merge_sfr_dfr, y = scoredfr0)
#str(zfr)
#str(sfr)

## =========================
## GRAPHIC RMSEscore_RMSEmin
## =========================
## One panel per dataset: global RMSE score (x) vs per-dataset best RMSE
## (y), both log1p-scaled; each point is drawn as its score label.
## (A dead no-op expression `names(zfr)[j]` was removed from the loop.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,4,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "RMSE.min"]),
       xlab = "RMSE.score (log1p)", ylab = "RMSE.min (log1p)",
       las = 1, col = 0, xaxt = "n")  # col = 0: invisible points, labels via text()
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "RMSE.min"]),
       labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=log1p(RMSE.score) (global)   y=log1p(RMSE.min) (per dataset)", outer = TRUE, line = 1)

## Same score-vs-RMSE comparison on the raw (non-log) scale.
## (Dead no-op expression `names(zfr)[j]` removed from the loop.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,4,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(zfr[[j]][, "RMSE.score"], zfr[[j]][, "RMSE.min"],
       xlab = "RMSE.score", ylab = "RMSE.min",
       las = 1, col = 0, xaxt = "n",  # col = 0: points hidden, scores drawn as text
       pch = as.character(zfr[[j]][, "RMSE.score"]))
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(zfr[[j]][, "RMSE.score"], zfr[[j]][, "RMSE.min"],
       labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=RMSE.score (global)   y=RMSE.min (per dataset)", outer = TRUE, line = 1)

4.3 Comparison of global scores and time mean per dataset

## ==============================
## GRAPHIC RMSEscore_timemean
## ==============================
## Global RMSE score (x) vs per-dataset mean time (y), log1p scales.
## (Dead no-op expression `names(zfr)[j]` removed from the loop.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "time.mean"]),
       xlab = "RMSE.score (log1p)", ylab = "time.mean (log1p)",
       las = 1, col = 0, xaxt = "n", yaxt = "n")
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "time.mean"]),
       labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=log1p(RMSE.score) (global)   y=log1p(time.mean) (per dataset)", outer = TRUE, line = 1)

## Same score-vs-time comparison on the raw (non-log) scale.
## (Dead no-op expression `names(zfr)[j]` removed from the loop.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,4,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(zfr[[j]][, "RMSE.score"], zfr[[j]][, "time.mean"],
       xlab = "RMSE.score", ylab = "time.mean",
       las = 1, col = 0, xaxt = "n")
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(zfr[[j]][, "RMSE.score"], zfr[[j]][, "time.mean"],
       labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=RMSE.score (global)   y=time.mean (per dataset)", outer = TRUE, line = 1)

4.4 By different numbers of algorithms

## =======================================
## GRAPHIC RMSEmin_timemean - 49 algos
## =======================================
## Best RMSE (x) vs mean time (y) per dataset for all algorithms,
## log1p scales; labels show the global RMSE score.
## (Dead no-op `names(zfr)[j]` and a mojibake comment were removed.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(log1p(zfr[[j]][, "RMSE.min"]), log1p(zfr[[j]][, "time.mean"]),
       xlab = "RMSE.min", ylab = "time.mean",
       las = 1, col = 0, xaxt = "n", yaxt = "n")
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(log1p(zfr[[j]][, "RMSE.min"]), log1p(zfr[[j]][, "time.mean"]),
       labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=RMSE.min (per dataset)   y=time.mean (per dataset)    49 algos", outer = TRUE, line = 1)

## =======================================
## GRAPHIC RMSEmin_timemean - 12 algos
## =======================================
## Same plot restricted to the 12 best-scored algorithms (rows 1:12 of
## each zfr table, which are sorted by global RMSE score).
## (Dead no-op `names(zfr)[j]` and a mojibake comment were removed.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(log1p(zfr[[j]][1:12, "RMSE.min"]), log1p(zfr[[j]][1:12, "time.mean"]),
       xlab = "RMSE.min", ylab = "time.mean",
       las = 1, col = 0, xaxt = "n", yaxt = "n")
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(log1p(zfr[[j]][1:12, "RMSE.min"]), log1p(zfr[[j]][1:12, "time.mean"]),
       labels = zfr[[j]][1:12, "RMSE.score"])
}
mtext("x=RMSE.min (per dataset)   y=time.mean (per dataset)    12 algos", outer = TRUE, line = 1)

## =======================================
## GRAPHIC RMSEmin_timemean - 09 algos
## =======================================
## Same plot restricted to the 9 best-scored algorithms.
## (Dead no-op `names(zfr)[j]` and a mojibake comment were removed.)
op <- par(mfrow = c(length(yfr)/2, 2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
  plot(log1p(zfr[[j]][1:9, "RMSE.min"]), log1p(zfr[[j]][1:9, "time.mean"]),
       xlab = "RMSE.min", ylab = "time.mean",
       las = 1, col = 0, xaxt = "n", yaxt = "n")
  mtext(names(zfr)[j], line = -1.2, cex = 0.8)
  text(log1p(zfr[[j]][1:9, "RMSE.min"]), log1p(zfr[[j]][1:9, "time.mean"]),
       labels = zfr[[j]][1:9, "RMSE.score"])
}
mtext("x=RMSE.min (per dataset)   y=time.mean (per dataset)    9 algos", outer = TRUE, line = 1)

## THE END
## THE END

4.5 Final graphics for article

## Horizontal spacing weights: the first 10 algorithms get double spacing
## so their labels do not overlap; cumsum(myscore) gives the x positions.
myscore <- c(rep(2, 10), rep(1, NROW(zfr[[1]])-10))
## Indices of the two datasets shown in the article figure.
myds <- seq_along(zfr)[names(zfr) %in% c("mIshigami", "uDreyfus1")]

png("mIshigami-uDreyfus1-RMSEmin.png", width = 1000, height = 500)
op <- par(mfrow = c(1,2), las = 1, mar = c(0,3,0,0), oma = c(1,1,3,2))
for (j in myds) {
  
  # col = 0 hides the points; the global RMSE scores are drawn as text().
  plot(cumsum(myscore), zfr[[j]][, "RMSE.min"],
       xlab = "RMSE.score", ylab = "RMSE.min", 
       ylim=c(.9*min(zfr[[j]][, "RMSE.min"]), 1.1*max(zfr[[j]][, "RMSE.min"])),
       las = 1, col=0, xaxt = "n", pch=as.character(zfr[[j]][, "RMSE.score"]))
  mtext(names(zfr)[j], line = -1.2, cex = 1.2)
  text(cumsum(myscore), zfr[[j]][, "RMSE.min"],
       labels = zfr[[j]][, "RMSE.score"])
  grid()
}
mtext("RMSE.min (per dataset) against RMSE.score (global)", outer = TRUE, line = 1)
dev.off()
## quartz_off_screen 
##                 2
## NOTE(review): the first myscore assignment is dead code — it is
## immediately overwritten by the uniform spacing below; apparently an
## exploration leftover kept for reference.
myscore <- c(rep(2, 10), rep(1, NROW(zfr[[1]])-30), rep(2, 20))
myscore <- rep(2, NROW(zfr[[1]]))

png("mIshigami-uDreyfus1-timmean.png", width = 1000, height = 500)
op <- par(mfrow = c(1,2), las = 1, mar = c(3,3,0,0), oma = c(1,1,3,2))
for (j in myds) 
{
  
  # x: evenly spaced positions (cumsum of constant 2, halved); y: mean time.
  xval <- cumsum(myscore)/2
  yval <- zfr[[j]][, "time.mean"]
  
  # type = "n" suppresses the points; scores are drawn as text() labels.
  plot(xval, yval, xlab = "RMSE.score", ylab = "time.mean", 
       ylim=c(.9*min(yval), 1.1*max(yval)), 
       col=1, pch=as.character(zfr[[j]][, "RMSE.score"]),
       type="n", xaxp=c(0,max(xval), 3))
  mtext(names(zfr)[j], line = -1.2, cex = 1.2)
  mtext("RMSE score", line = -20)
  text(xval, yval,
       labels = zfr[[j]][, "RMSE.score"])
  grid()
}
mtext("Mean time (per dataset) against RMSE score (global)", outer = TRUE, line = 1)
mtext("Time", side=2, outer=TRUE, adj=0, line=1)

dev.off()
## quartz_off_screen 
##                 2